Last Update: 2025/3/26
SenseFlow Conversation API
The SenseFlow Conversation API lets you manage user messages: submit message feedback, fetch suggested follow-up questions, and retrieve conversation message history.
Endpoints
Message Feedback
POST https://platform.llmprovider.ai/v1/agent/messages/{message_id}/feedbacks
Submit user feedback (likes/dislikes) for messages to help developers optimize output.
Path Parameters
| Parameter | Type | Description |
| --- | --- | --- |
| message_id | string | Message ID |
Request Headers
| Header | Value |
| --- | --- |
| Authorization | Bearer YOUR_API_KEY |
| Content-Type | application/json |
Request Body Parameters
| Parameter | Type | Description |
| --- | --- | --- |
| model | string | Agent name |
| rating | string | Feedback type: "like", "dislike", or null |
| user | string | User identifier |
| content | string | Optional feedback details |
Response
| Field | Type | Description |
| --- | --- | --- |
| result | string | Operation status ("success") |
Example Response
{
"result": "success"
}
Example Request
- Shell
- Python
- Node.js
curl -X POST 'https://platform.llmprovider.ai/v1/agent/messages/msg_123/feedbacks' \
--header 'Authorization: Bearer YOUR_API_KEY' \
--header 'Content-Type: application/json' \
--data-raw '{
"model": "",
"rating": "like",
"user": "abc-123",
"content": "This response was helpful"
}'
import requests
api_key = 'YOUR_API_KEY'
url = 'https://platform.llmprovider.ai/v1/agent/messages/msg_123/feedbacks'
headers = {
'Authorization': f'Bearer {api_key}',
'Content-Type': 'application/json'
}
data = {
'model': '',
'rating': 'like',
'user': 'abc-123',
'content': 'This response was helpful'
}
response = requests.post(url, headers=headers, json=data)
print(response.json())
const axios = require('axios');
const apiKey = 'YOUR_API_KEY';
const url = 'https://platform.llmprovider.ai/v1/agent/messages/msg_123/feedbacks';
const data = {
model: '',
rating: 'like',
user: 'abc-123',
content: 'This response was helpful'
};
const headers = {
'Authorization': `Bearer ${apiKey}`,
'Content-Type': 'application/json'
};
axios.post(url, data, {headers})
.then(response => console.log(response.data))
.catch(error => console.error(error));
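Because the rating field also accepts null, the same endpoint can be used to withdraw feedback that was submitted earlier. The sketch below is a minimal example of that pattern, assuming a null rating clears the previous "like"/"dislike"; confirm this behavior against your deployment.

```python
import requests

api_key = 'YOUR_API_KEY'
url = 'https://platform.llmprovider.ai/v1/agent/messages/msg_123/feedbacks'

headers = {
    'Authorization': f'Bearer {api_key}',
    'Content-Type': 'application/json'
}

# Assumption: sending rating=null (None in Python) withdraws the earlier rating.
data = {
    'model': '',
    'rating': None,
    'user': 'abc-123'
}

response = requests.post(url, headers=headers, json=data)
print(response.json())  # expected: {"result": "success"}
```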
Get Next Suggested Questions
GET https://platform.llmprovider.ai/v1/agent/messages/{message_id}/suggested
Get suggested follow-up questions for a message.
Request Headers
| Header | Value |
| --- | --- |
| Authorization | Bearer YOUR_API_KEY |
Path Parameters
| Parameter | Type | Description |
| --- | --- | --- |
| message_id | string | Message ID |
Query Parameters
| Parameter | Type | Description |
| --- | --- | --- |
| model | string | Agent name |
| user | string | End user identifier |
Response
| Field | Type | Description |
| --- | --- | --- |
| result | string | Operation status ("success") |
| data | array[string] | List of suggested questions |
Example Request
- Shell
- Python
- Node.js
curl -X GET 'https://platform.llmprovider.ai/v1/agent/messages/msg_123/suggested?model=&user=abc-123' \
--header 'Authorization: Bearer YOUR_API_KEY'
import requests
api_key = 'YOUR_API_KEY'
url = 'https://platform.llmprovider.ai/v1/agent/messages/msg_123/suggested'
headers = {
'Authorization': f'Bearer {api_key}'
}
params = {
'model': '',
'user': 'abc-123'
}
response = requests.get(url, headers=headers, params=params)
print(response.json())
const axios = require('axios');
const apiKey = 'YOUR_API_KEY';
const url = 'https://platform.llmprovider.ai/v1/agent/messages/msg_123/suggested';
const params = {
model: '',
user: 'abc-123'
};
const headers = {
'Authorization': `Bearer ${apiKey}`
};
axios.get(url, {headers, params})
.then(response => console.log(response.data))
.catch(error => console.error(error));
Example Response
{
"result": "success",
"data": [
"Tell me more about feature A",
"How does feature B work?",
"Can you explain feature C?"
]
}
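A typical client checks the result field and then surfaces each entry of data as a follow-up prompt. A minimal sketch of that flow (the printed output stands in for whatever UI you use):

```python
import requests

api_key = 'YOUR_API_KEY'
url = 'https://platform.llmprovider.ai/v1/agent/messages/msg_123/suggested'

response = requests.get(
    url,
    headers={'Authorization': f'Bearer {api_key}'},
    params={'model': '', 'user': 'abc-123'},
)
body = response.json()

# Only use the suggestions when the call reports success.
if body.get('result') == 'success':
    for question in body.get('data', []):
        print(f'Suggested follow-up: {question}')
```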
Get Conversation Message History
GET https://platform.llmprovider.ai/v1/agent/messages
Retrieve chat history messages in paginated form.
Request Headers
Header | Value |
---|---|
Authorization | Bearer YOUR_API_KEY |
Query Parameters
| Parameter | Type | Description |
| --- | --- | --- |
| model | string | Agent name |
| conversation_id | string | Conversation ID |
| user | string | End user identifier |
| first_id | string | (Optional) ID of the first message on the current page |
| limit | int | (Optional) Number of messages to return (default 20) |
Response
| Field | Type | Description |
| --- | --- | --- |
| data | array[Message] | Array of message objects |
| has_more | boolean | Whether more messages are available |
| limit | integer | Number of messages returned |
Message Object
| Field | Type | Description |
| --- | --- | --- |
| id | string | Message ID |
| conversation_id | string | Conversation ID |
| inputs | object | User input parameters |
| query | string | User input/question |
| message_files | array | Array of file objects |
| answer | string | Response content |
| created_at | integer | Creation timestamp |
| feedback | object | User feedback information |
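For stricter client code, the Message object can be mirrored as a typed structure. A minimal sketch using Python's TypedDict, based only on the fields listed above (inputs, message_files, and feedback are left loosely typed because their inner structure is not specified here):

```python
from typing import Any, Dict, List, Optional, TypedDict

class Message(TypedDict):
    """Message object returned by GET /v1/agent/messages."""
    id: str                     # Message ID
    conversation_id: str        # Conversation ID
    inputs: Dict[str, Any]      # User input parameters
    query: str                  # User input/question
    message_files: List[Any]    # File objects (structure not detailed here)
    answer: str                 # Response content
    created_at: int             # Creation timestamp
    feedback: Optional[Dict[str, Any]]  # User feedback information, or null
```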
Example Request
- Shell
- Python
- Node.js
curl -X GET 'https://platform.llmprovider.ai/v1/agent/messages?model=&user=abc-123&conversation_id=conv_12345' \
--header 'Authorization: Bearer YOUR_API_KEY'
import requests
api_key = 'YOUR_API_KEY'
url = 'https://platform.llmprovider.ai/v1/agent/messages'
headers = {
'Authorization': f'Bearer {api_key}'
}
params = {
'model': '',
'user': 'abc-123',
'conversation_id': 'conv_12345'
}
response = requests.get(url, headers=headers, params=params)
print(response.json())
const axios = require('axios');
const apiKey = 'YOUR_API_KEY';
const url = 'https://platform.llmprovider.ai/v1/agent/messages';
const params = {
model: '',
user: 'abc-123',
conversation_id: 'conv_12345'
};
const headers = {
'Authorization': `Bearer ${apiKey}`
};
axios.get(url, {headers, params})
.then(response => console.log(response.data))
.catch(error => console.error(error));
Example Response
{
"limit": 20,
"has_more": false,
"data": [
{
"id": "msg_12345",
"conversation_id": "conv_12345",
"inputs": {},
"query": "Hello",
"answer": "Hi! How can I help you today?",
"message_files": [],
"feedback": null,
"created_at": 1705569239
}
]
}
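Because the history is paginated, clients usually keep requesting pages until has_more is false. The sketch below assumes that passing the id of the first message of the page just returned as first_id yields the preceding page; verify the paging direction against your deployment.

```python
import requests

api_key = 'YOUR_API_KEY'
url = 'https://platform.llmprovider.ai/v1/agent/messages'
headers = {'Authorization': f'Bearer {api_key}'}

params = {
    'model': '',
    'user': 'abc-123',
    'conversation_id': 'conv_12345',
    'limit': 20,
}

all_messages = []
while True:
    page = requests.get(url, headers=headers, params=params).json()
    messages = page.get('data', [])
    all_messages.extend(messages)

    # Stop when the API reports no further pages (or the page is empty).
    if not page.get('has_more') or not messages:
        break

    # Assumption: the next page is addressed by the id of the first
    # message on the current page.
    params['first_id'] = messages[0]['id']

print(f'Fetched {len(all_messages)} messages')
```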